In [1]:
import json
import copy
import time
import random
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
from matplotlib import pyplot as plt
from torchsummary import summary
In [2]:
from nmfd_gnn import NMFD_GNN

1: set parameters

In [3]:
print (torch.cuda.is_available())
# BUGFIX: fall back to CPU when CUDA is unavailable instead of unconditionally
# requesting cuda:0 (every later .to(device) call would fail on CPU-only hosts).
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
random_seed = 42
random.seed(random_seed)
torch.manual_seed(random_seed)
torch.cuda.manual_seed(random_seed)   # no-op on CPU-only installs
r = random.random   # source passed to random.shuffle for reproducible epoch shuffling
True
In [4]:
#1.1: settings
M = 20                       #number of time intervals in a window
missing_ratio = 0.50         #fraction of readings that are masked out
file_name = f"m_{M}_missing_{int(missing_ratio * 100)}"
print (file_name)

#1.2: hyperparameters
num_epochs, batch_size, learning_rate = 200, 16, 0.001
beta_flow, beta_occ, beta_phy = 1.0, 1.0, 0.1   #weights of flow/occ data losses and physics loss
batch_size_vt = 16  #batch size for evaluation and test
delta_ratio = 0.1   #the ratio of delta in the standard deviation of flow

hyper = {
    "n_e": num_epochs,
    "b_s": batch_size,
    "b_s_vt": batch_size_vt,
    "l_r": learning_rate,
    "beta_f": beta_flow,
    "beta_o": beta_occ,
    "beta_p": beta_phy,
    "delta_ratio": delta_ratio,
}

gnn_dim_1, gnn_dim_2, gnn_dim_3, lstm_dim = 2, 128, 128, 128
p_dim = 10    #column dimension of L1, L2
c_k = 5.5     #meter, the sum of loop width and uniform vehicle length. based on Gero and Daganzo 2008.
theta_ini = [-2.757, 4.996, -2.409, 1.638, 3.569]

hyper_model = {
    "g_dim_1": gnn_dim_1,
    "g_dim_2": gnn_dim_2,
    "g_dim_3": gnn_dim_3,
    "l_dim": lstm_dim,
    "p_dim": p_dim,
    "c_k": c_k,
    "theta_ini": theta_ini,
}
max_no_decrease = 30   #early-stop patience (epochs without validation improvement)

#1.3: set paths
root_path = "/home/umni2/a/umnilab/users/xue120/umni4/2023_mfd_traffic/"
file_path = root_path + "2_prepare_data/" + file_name + "/"
train_path = file_path + "train.json"
vali_path = file_path + "vali.json"
test_path = file_path + "test.json"
sensor_id_path = file_path + "sensor_id_order.json"
sensor_adj_path = file_path + "sensor_adj.json"
mean_std_path = file_path + "mean_std.json"
m_20_missing_50

2: visualization

In [5]:
def visualize_train_loss(total_phy_flow_occ_loss):
    """Plot the per-epoch physics/flow/occupancy training losses.

    total_phy_flow_occ_loss: list of [total, phy, flow, occ] loss rows, one
    per epoch. The figure is saved as <file_name>/train_loss.png and shown.
    """
    plt.figure(figsize=(4,3), dpi=75)
    loss_arr = np.array(total_phy_flow_occ_loss)
    epochs = range(loss_arr.shape[0])
    # Column 0 is the total loss; plot the three components only.
    for col_idx, lbl in ((1, "phy loss"), (2, "flow loss"), (3, "occ loss")):
        plt.plot(epochs, loss_arr[:, col_idx], linewidth=1, label = lbl)
    plt.legend()
    plt.title('Loss decline on train')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.savefig(file_name + '/' + 'train_loss.png', bbox_inches = 'tight')
    plt.show()
    
def visualize_flow_loss(vali_f_mae, test_f_mae):
    """Plot per-epoch flow MAE (veh/h) for validation and test sets.

    Saves the figure as <file_name>/flow_mae.png and shows it inline.
    """
    plt.figure(figsize=(4,3), dpi=75)
    epochs = range(len(vali_f_mae))
    for series, lbl in ((vali_f_mae, "Validate"), (test_f_mae, "Test")):
        plt.plot(epochs, series, linewidth=1, label=lbl)
    plt.legend()
    plt.title('MAE of flow on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE (veh/h)')
    plt.savefig(file_name + '/' + 'flow_mae.png', bbox_inches = 'tight')
    plt.show()
    
def visualize_occ_loss(vali_o_mae, test_o_mae):
    """Plot per-epoch occupancy MAE for validation and test sets.

    Saves the figure as <file_name>/occ_mae.png and shows it inline.
    """
    plt.figure(figsize=(4,3), dpi=75)
    epochs = range(len(vali_o_mae))
    for series, lbl in ((vali_o_mae, "Validate"), (test_o_mae, "Test")):
        plt.plot(epochs, series, linewidth=1, label=lbl)
    plt.legend()
    plt.title('MAE of occupancy on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE')
    plt.savefig(file_name + '/' + 'occ_mae.png',bbox_inches = 'tight')
    plt.show()

3: compute the error

In [6]:
def MAELoss(yhat, y):
    """Mean absolute error between two tensors, returned as a Python float.

    BUGFIX: the original wrapped the absolute error in torch.div(..., 1),
    a no-op division; it is removed here with identical results.
    """
    return float(torch.mean(torch.abs(yhat - y)))

def RMSELoss(yhat, y):
    """Root-mean-square error between two tensors, returned as a Python float."""
    squared_err = (yhat - y) ** 2
    return float(torch.sqrt(squared_err.mean()))

def vali_test(model, f, f_mask, o, o_mask, f_o_mean_std, b_s_vt):    
    """Batched evaluation of the model on a validation/test split.

    Parameters
    ----------
    model : object exposing .run(flow_masked, occ_masked) ->
        (flow_hat, occ_hat, q_hat, q_theta); predictions are moved to CPU here.
    f, o : normalized ground-truth flow / occupancy tensors (on CPU).
    f_mask, o_mask : masked (missing-data) inputs fed to the model.
    f_o_mean_std : [f_mean, f_std, o_mean, o_std]; the stds de-normalize errors
        back to original units (flow: veh/h; occupancy: fraction).
    b_s_vt : evaluation batch size.

    Returns
    -------
    (f_mae, f_rmse, o_mae, o_rmse) over the whole split, in original units.
    """
    flow_std, occ_std, n = f_o_mean_std[1], f_o_mean_std[3], len(f)
    f_mae_list, f_rmse_list, o_mae_list, o_rmse_list, num_list = list(), list(), list(), list(), list()
    for i in range(0, n, b_s_vt):
        # s:e covers one batch; the final batch may be smaller than b_s_vt.
        s, e = i, np.min([i+b_s_vt, n])
        num_list.append(e-s)
        bf, bo, bf_mask, bo_mask = f[s: e], o[s: e], f_mask[s: e], o_mask[s: e]  
        bf_hat, bo_hat, bq_hat, bq_theta = model.run(bf_mask, bo_mask)
        bf_hat, bo_hat = bf_hat.cpu(), bo_hat.cpu()
        # Multiplying by the std converts normalized errors to original units.
        bf_mae, bf_rmse = MAELoss(bf_hat, bf)*flow_std, RMSELoss(bf_hat, bf)*flow_std
        bo_mae, bo_rmse = MAELoss(bo_hat, bo)*occ_std, RMSELoss(bo_hat, bo)*occ_std
        f_mae_list.append(bf_mae)
        f_rmse_list.append(bf_rmse)
        o_mae_list.append(bo_mae)
        o_rmse_list.append(bo_rmse)
    # Size-weighted mean of per-batch MAEs equals the overall MAE.
    f_mae, o_mae = np.dot(f_mae_list, num_list)/n, np.dot(o_mae_list, num_list)/n
    # Overall RMSE = sqrt of the size-weighted mean of squared per-batch RMSEs.
    f_rmse = np.sqrt(np.dot(np.multiply(f_rmse_list, f_rmse_list), num_list)/n)
    o_rmse = np.sqrt(np.dot(np.multiply(o_rmse_list, o_rmse_list), num_list)/n)
    return f_mae, f_rmse, o_mae, o_rmse

def evaluate(model, vt_f, vt_o, vt_f_m, vt_o_m, f_o_mean_std, b_s_vt): #vt: vali_test
    """Thin wrapper around vali_test, reordering (flow, occ, flow_mask,
    occ_mask) into vali_test's (flow, flow_mask, occ, occ_mask) convention.

    Returns (f_mae, f_rmse, o_mae, o_rmse) in original units.
    """
    return vali_test(model, vt_f, vt_f_m, vt_o, vt_o_m, f_o_mean_std, b_s_vt)

4: train

In [7]:
import torch
In [8]:
#4.1: one training epoch
def train_epoch(model, opt, criterion, train_f_x, train_f_y, train_o_x, train_o_y, hyper, flow_std_squ, delta): 
    """Train the model for one epoch and return the batch-averaged losses.

    Parameters
    ----------
    model : network exposing .run(flow_masked, occ_masked) ->
        (flow_hat, occ_hat, q_hat, q_theta).
    opt : optimizer over model.parameters().
    criterion : elementwise loss (nn.MSELoss in this notebook).
    train_f_x, train_f_y : masked input / ground-truth flow tensors.
    train_o_x, train_o_y : masked input / ground-truth occupancy tensors.
    hyper : dict with loss weights "beta_f"/"beta_o"/"beta_p" and batch size "b_s".
    flow_std_squ : squared flow std, rescales the physics loss to the
        normalized-data scale.
    delta : tolerance band of the hinge-style physics loss.

    Returns
    -------
    (aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss)
    """
    #f: flow; o: occupancy
    model.train()
    losses, p_losses, f_losses, o_losses = list(), list(), list(), list()

    beta_f, beta_o, beta_p, b_s = hyper["beta_f"], hyper["beta_o"], hyper["beta_p"], hyper["b_s"]
    n = len(train_f_x)
    print ("# batch: ", int(n/b_s))

    # NOTE: range(0, n-b_s, b_s) drops the final partial batch, as the
    # original code did.
    for i in range(0, n-b_s, b_s):
        time1 = time.time()
        x_f_batch, y_f_batch = train_f_x[i: i+b_s], train_f_y[i: i+b_s]
        x_o_batch, y_o_batch = train_o_x[i: i+b_s], train_o_y[i: i+b_s]

        opt.zero_grad()
        y_f_hat, y_o_hat, q_hat, q_theta = model.run(x_f_batch, x_o_batch)

        # Hinge-style physics loss: only gaps |q_hat - q_theta| beyond the
        # tolerance delta are penalized, then rescaled by the squared flow std.
        q_gap = q_hat - q_theta
        delta_gap = torch.ones(q_gap.shape, device=device)*delta
        zero_gap = torch.zeros(q_gap.shape, device=device)            #(n, m)
        hl_loss = torch.max(q_gap-delta_gap, zero_gap) + torch.max(-delta_gap-q_gap, zero_gap)
        hl_loss = hl_loss/flow_std_squ
        p_loss = criterion(hl_loss, zero_gap).cpu()               #physical (hinge) loss
        f_loss = criterion(y_f_hat.cpu(), y_f_batch)              #data loss of flow
        o_loss = criterion(y_o_hat.cpu(), y_o_batch)              #data loss of occupancy

        loss = beta_f*f_loss + beta_o*o_loss + beta_p*p_loss

        loss.backward()
        opt.step()
        losses.append(loss.data.numpy())
        p_losses.append(p_loss.data.numpy())
        f_losses.append(f_loss.data.numpy())
        o_losses.append(o_loss.data.numpy())

        if i % (64*b_s) == 0:
            print ("i_batch: ", i/b_s)
            print ("the loss for this batch: ", loss.data.numpy())
            print ("flow loss", f_loss.data.numpy())
            print ("occ loss", o_loss.data.numpy())
            time2 = time.time()
            print ("time for this batch", time2-time1)
            print ("----------------------------------")

    # BUGFIX: the averages used to be recomputed inside the batch loop on
    # every iteration (accidental O(#batches^2) summing); compute them once
    # here. Also guard the degenerate n <= b_s case, where the loop body never
    # ran and the averages were previously undefined (NameError). With the
    # empty case handled explicitly, the old +0.000001 epsilon in the divisor
    # is no longer needed.
    if not losses:
        return 0.0, model, 0.0, 0.0, 0.0
    n_loss = float(len(losses))
    aver_loss = sum(losses)/n_loss
    aver_p_loss = sum(p_losses)/n_loss
    aver_f_loss = sum(f_losses)/n_loss
    aver_o_loss = sum(o_losses)/n_loss
    return aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss

#4.2: all train epochs
def train_process(model, criterion, train, vali, test, hyper, f_o_mean_std):
    """Full training loop: per-epoch shuffling, training, evaluation,
    plotting, checkpoint-free metric logging, and early stopping.

    Parameters
    ----------
    model : NMFD_GNN instance already moved to `device`.
    criterion : elementwise loss (nn.MSELoss).
    train, vali, test : dicts of tensors produced by load_data/tensorize.
    hyper : hyperparameter dict built in section 1.2.
    f_o_mean_std : [f_mean, f_std, o_mean, o_std] normalization stats.

    Returns
    -------
    (total_phy_flow_occ_loss, model) — per-epoch [total, phy, flow, occ]
    train losses and the (possibly early-stopped) trained model.

    Side effects: writes <file_name>/performance.json and three PNG figures
    every epoch; relies on module globals device, r, max_no_decrease, file_name.
    """
    total_phy_flow_occ_loss = list()
    
    n_mse_flow_occ = 0 #mse(flow) + mse(occ) for validation sets.
    f_std = f_o_mean_std[1]
    
    # Masked inputs go to the GPU for model.run; targets stay on CPU where the
    # losses are computed.
    vali_f, vali_o = vali["flow"], vali["occupancy"] 
    vali_f_m, vali_o_m = vali["flow_mask"].to(device), vali["occupancy_mask"].to(device) 
    test_f, test_o = test["flow"], test["occupancy"] 
    test_f_m, test_o_m = test["flow_mask"].to(device), test["occupancy_mask"].to(device) 
    
    l_r, n_e = hyper["l_r"], hyper["n_e"]
    opt = optim.Adam(model.parameters(), l_r, betas = (0.9,0.999), weight_decay=0.0001)
    # Learning-rate decay (default gamma=0.1) once, after epoch 150.
    opt_scheduler = torch.optim.lr_scheduler.MultiStepLR(opt, milestones=[150])
    
    print ("# epochs ", n_e)
    r_vali_f_mae, r_vali_o_mae, r_test_f_mae, r_test_o_mae = list(), list(), list(), list()
    r_vali_f_rmse, r_vali_o_rmse, r_test_f_rmse, r_test_o_rmse = list(), list(), list(), list()
    
    flow_std_squ = np.power(f_std, 2)
    
    no_decrease = 0
    for i in range(n_e):
        print ("----------------an epoch starts-------------------")
        #time1_s = time.time()
        
        time_s = time.time()
        print ("i_epoch: ", i)
        # Shuffle sample order every epoch using the seeded generator `r`
        # (module global) for reproducibility.
        # NOTE(review): the `random=` argument of random.shuffle was removed in
        # Python 3.11; on newer interpreters this call must drop it.
        n_train = len(train["flow"])
        number_list = copy.copy(list(range(n_train)))
        random.shuffle(number_list, random = r)
        shuffle_idx = torch.tensor(number_list)
        train_x_f, train_y_f = train["flow_mask"][shuffle_idx], train["flow"][shuffle_idx]
        train_x_o, train_y_o = train["occupancy_mask"][shuffle_idx], train["occupancy"][shuffle_idx] 
        
        # delta is the hinge-loss tolerance, a fixed fraction of the flow std.
        delta = hyper["delta_ratio"] * f_std
        aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss =\
            train_epoch(model, opt, criterion, train_x_f.to(device), train_y_f,\
                        train_x_o.to(device), train_y_o, hyper, flow_std_squ, delta)
        opt_scheduler.step()
        
        total_phy_flow_occ_loss.append([aver_loss, aver_p_loss, aver_f_loss, aver_o_loss])
        print ("train loss for this epoch: ", round(aver_loss, 6))
        
        #evaluate
        b_s_vt = hyper["b_s_vt"]
        vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse =\
            evaluate(model, vali_f, vali_o, vali_f_m, vali_o_m, f_o_mean_std, b_s_vt)
        test_f_mae, test_f_rmse, test_o_mae, test_o_rmse =\
            evaluate(model, test_f, test_o, test_f_m, test_o_m, f_o_mean_std, b_s_vt)  
        
        r_vali_f_mae.append(vali_f_mae)
        r_test_f_mae.append(test_f_mae)
        r_vali_o_mae.append(vali_o_mae)
        r_test_o_mae.append(test_o_mae)
        r_vali_f_rmse.append(vali_f_rmse)
        r_test_f_rmse.append(test_f_rmse)
        r_vali_o_rmse.append(vali_o_rmse)
        r_test_o_rmse.append(test_o_rmse)
        
        visualize_train_loss(total_phy_flow_occ_loss)
        visualize_flow_loss(r_vali_f_mae, r_test_f_mae)
        visualize_occ_loss(r_vali_o_mae, r_test_o_mae)
        time_e = time.time()
        print ("time for this epoch", time_e - time_s)
        
        # Persist all metrics so far every epoch, so a crash loses nothing.
        performance = {"train": total_phy_flow_occ_loss,\
                  "vali": [r_vali_f_mae, r_vali_f_rmse, r_vali_o_mae, r_vali_o_rmse],\
                  "test": [r_test_f_mae, r_test_f_rmse, r_test_o_mae, r_test_o_rmse]}
        subfile =  open(file_name + '/' + 'performance'+'.json','w')
        json.dump(performance, subfile)
        subfile.close()
        
        #early stop
        # Monitored quantity: sum of squared std-normalized validation RMSEs.
        flow_std, occ_std = f_o_mean_std[1], f_o_mean_std[3]
        norm_f_rmse, norm_o_rmse = vali_f_rmse/flow_std, vali_o_rmse/occ_std
        norm_sum_mse = norm_f_rmse*norm_f_rmse + norm_o_rmse*norm_o_rmse
        
        # min_until_now already includes this epoch's value, so no_decrease is
        # incremented only when this epoch is strictly worse than the best so far.
        if n_mse_flow_occ > 0:
            min_until_now = min([min_until_now, norm_sum_mse])
        else:
            min_until_now = 1000000.0  
        if norm_sum_mse > min_until_now:
            no_decrease = no_decrease+1
        else:
            no_decrease = 0
        if no_decrease == max_no_decrease:
            print ("Early stop at the " + str(i+1) + "-th epoch")
            return total_phy_flow_occ_loss, model 
        n_mse_flow_occ = n_mse_flow_occ + 1
        
        print ("No_decrease: ", no_decrease)
    return total_phy_flow_occ_loss, model    

5: prepare tensors

In [9]:
def tensorize(train_vali_test):
    """Convert the four flow/occupancy lists of one data split into torch
    tensors, returned in a new dict under the same keys."""
    keys = ("flow", "flow_mask", "occupancy", "occupancy_mask")
    return {key: torch.tensor(train_vali_test[key]) for key in keys}

def normalize_flow_occ(tvt, f_o_mean_std):  #tvt: train, vali, test
    """Z-score normalize the flow and occupancy series (and their masked
    variants) of one split, using the split-independent stats in
    f_o_mean_std = [f_mean, f_std, o_mean, o_std]. Mutates and returns tvt,
    with values kept as nested Python lists."""
    f_mean, f_std, o_mean, o_std = f_o_mean_std
    normalization_plan = (
        ("flow", f_mean, f_std),
        ("flow_mask", f_mean, f_std),
        ("occupancy", o_mean, o_std),
        ("occupancy_mask", o_mean, o_std),
    )
    for key, mean, std in normalization_plan:
        tvt[key] = ((np.array(tvt[key]) - mean) / std).tolist()
    return tvt

def transform_distance(d_matrix):
    """Transform a distance matrix in place with a Gaussian kernel:
    each entry d becomes exp(-10000 * d^2 / sigma^2), where sigma is the
    standard deviation of all entries. Mutates and returns d_matrix."""
    sigma = np.std(d_matrix)
    sigma_square = sigma * sigma
    for row in d_matrix:
        for j, d_value in enumerate(row):
            row[j] = np.exp(0.0 - 10000.0 * d_value * d_value / sigma_square)
    return d_matrix

def load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path):
    """Load the three JSON splits, adjacency and stats, and prepare tensors.

    Returns
    -------
    train, vali, test : dicts of normalized torch tensors (see tensorize).
    adj : float tensor on `device`: Gaussian-transformed distance matrix.
    n_sensor : number of sensors (rows per sample in train["flow"]).
    f_o_mean_std : [f_mean, f_std, o_mean, o_std] normalization stats.
    sensor_length : list mapping sensor order index -> length value taken from
        position 3 of each sensor_id record (presumably a road-segment length;
        units not stated here — TODO confirm against the data-prep notebook).
    """
    # NOTE(review): json.load(open(...)) leaves file handles unclosed;
    # consider `with open(...)` blocks.
    mean_std = json.load(open(mean_std_path))
    f_mean, f_std, o_mean, o_std =\
        mean_std["f_mean"], mean_std["f_std"], mean_std["o_mean"], mean_std["o_std"]
    f_o_mean_std = [f_mean, f_std, o_mean, o_std]
    
    train = json.load(open(train_path))
    vali = json.load(open(vali_path))
    test = json.load(open(test_path))
    adj = json.load(open(sensor_adj_path))["adj"]
    n_sensor = len(train["flow"][0])    
    
    # Normalize with the shared stats, then convert lists -> tensors.
    train = tensorize(normalize_flow_occ(train, f_o_mean_std))
    vali = tensorize(normalize_flow_occ(vali, f_o_mean_std))
    test = tensorize(normalize_flow_occ(test, f_o_mean_std))

    adj = torch.tensor(transform_distance(adj), device=device).float()   
    
    # sensor_id record layout: index 0 = position in the canonical sensor
    # order, index 3 = length value stored for that sensor.
    df_sensor_id = json.load(open(sensor_id_path))
    sensor_length = [0.0 for i in range(n_sensor)]
    for sensor in df_sensor_id:
        sensor_length[df_sensor_id[sensor][0]] = df_sensor_id[sensor][3]
        
    return train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length

6: main

In [10]:
#6.1 load the data
# Read the train/vali/test JSON splits, normalize them with the shared stats,
# and build the adjacency tensor; timed because it takes ~20 s on this dataset.
time1 = time.time()
train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length =\
    load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path)
time2 = time.time()
print (time2-time1)
18.573214054107666
In [11]:
# Sanity check: number of windows in each split, and the normalization stats
# in the order [f_mean, f_std, o_mean, o_std].
print (len(train["flow"]))
print (len(vali["flow"]))
print (len(test["flow"]))
print (f_o_mean_std)
1997
653
653
[241.21586152814126, 220.92336003653475, 0.13805152810287494, 0.1920120065038222]
In [12]:
# Build the physics-informed GNN (imported from nmfd_gnn) on `device`, and the
# MSE criterion shared by both the data losses and the physics (hinge) loss.
model = NMFD_GNN(n_sensor, M, hyper_model, f_o_mean_std, sensor_length, adj).to(device)   
cri = nn.MSELoss() 
In [13]:
#6.2: train the model
# Runs the full training loop with early stopping; each epoch saves the loss
# figures and performance.json into the <file_name> directory.
total_phy_flow_occ_loss, trained_model = train_process(model, cri, train, vali, test, hyper, f_o_mean_std)
# epochs  200
----------------an epoch starts-------------------
i_epoch:  0
# batch:  124
i_batch:  0.0
the loss for this batch:  1.71809
flow loss 0.89621675
occ loss 0.82187146
time for this batch 0.6436805725097656
----------------------------------
i_batch:  64.0
the loss for this batch:  0.45944092
flow loss 0.16827554
occ loss 0.29116258
time for this batch 0.36271119117736816
----------------------------------
train loss for this epoch:  0.591072
time for this epoch 56.651963233947754
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  1
# batch:  124
i_batch:  0.0
the loss for this batch:  0.42612982
flow loss 0.14691354
occ loss 0.279213
time for this batch 0.3153653144836426
----------------------------------
i_batch:  64.0
the loss for this batch:  0.47933322
flow loss 0.15097466
occ loss 0.32835498
time for this batch 0.38279104232788086
----------------------------------
train loss for this epoch:  0.375155
time for this epoch 57.072731018066406
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  2
# batch:  124
i_batch:  0.0
the loss for this batch:  0.27988362
flow loss 0.09501598
occ loss 0.18486524
time for this batch 0.37389445304870605
----------------------------------
i_batch:  64.0
the loss for this batch:  0.32103604
flow loss 0.10087322
occ loss 0.22015983
time for this batch 0.4050748348236084
----------------------------------
train loss for this epoch:  0.336523
time for this epoch 62.66493630409241
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  3
# batch:  124
i_batch:  0.0
the loss for this batch:  0.33583727
flow loss 0.10371247
occ loss 0.2321219
time for this batch 0.33260416984558105
----------------------------------
i_batch:  64.0
the loss for this batch:  0.34449634
flow loss 0.09586112
occ loss 0.24863279
time for this batch 0.43418192863464355
----------------------------------
train loss for this epoch:  0.316722
time for this epoch 63.27169585227966
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  4
# batch:  124
i_batch:  0.0
the loss for this batch:  0.36231226
flow loss 0.10087846
occ loss 0.2614305
time for this batch 0.3613440990447998
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29859093
flow loss 0.085553914
occ loss 0.21303426
time for this batch 0.43465113639831543
----------------------------------
train loss for this epoch:  0.305296
time for this epoch 69.3442931175232
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  5
# batch:  124
i_batch:  0.0
the loss for this batch:  0.3121631
flow loss 0.091825366
occ loss 0.22033463
time for this batch 0.38411378860473633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19562195
flow loss 0.06497544
occ loss 0.1306447
time for this batch 0.42182183265686035
----------------------------------
train loss for this epoch:  0.297636
time for this epoch 66.75419640541077
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  6
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25688508
flow loss 0.08622245
occ loss 0.17065994
time for this batch 0.365706205368042
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2336505
flow loss 0.074063204
occ loss 0.159585
time for this batch 0.4120488166809082
----------------------------------
train loss for this epoch:  0.290509
time for this epoch 63.651856899261475
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  7
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22877006
flow loss 0.066490635
occ loss 0.16227767
time for this batch 0.33281397819519043
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29710495
flow loss 0.08541275
occ loss 0.21168904
time for this batch 0.42058801651000977
----------------------------------
train loss for this epoch:  0.285314
time for this epoch 64.46010613441467
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  8
# batch:  124
i_batch:  0.0
the loss for this batch:  0.33263186
flow loss 0.09582901
occ loss 0.23679972
time for this batch 0.3671262264251709
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23194857
flow loss 0.06785423
occ loss 0.164092
time for this batch 0.42928004264831543
----------------------------------
train loss for this epoch:  0.2799
time for this epoch 63.935362339019775
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  9
# batch:  124
i_batch:  0.0
the loss for this batch:  0.3675995
flow loss 0.09699231
occ loss 0.2706036
time for this batch 0.3756434917449951
----------------------------------
i_batch:  64.0
the loss for this batch:  0.33954108
flow loss 0.09046857
occ loss 0.24906915
time for this batch 0.4548826217651367
----------------------------------
train loss for this epoch:  0.275519
time for this epoch 64.61618542671204
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  10
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21049699
flow loss 0.0649654
occ loss 0.14552923
time for this batch 0.3508491516113281
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22128566
flow loss 0.06449693
occ loss 0.15678622
time for this batch 0.427565336227417
----------------------------------
train loss for this epoch:  0.27193
time for this epoch 64.63073778152466
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  11
# batch:  124
i_batch:  0.0
the loss for this batch:  0.27655515
flow loss 0.07986489
occ loss 0.196687
time for this batch 0.38117480278015137
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18558463
flow loss 0.0587819
occ loss 0.12680069
time for this batch 0.416989803314209
----------------------------------
train loss for this epoch:  0.26844
time for this epoch 65.28579902648926
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  12
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25190794
flow loss 0.071572185
occ loss 0.18033287
time for this batch 0.3795349597930908
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3218456
flow loss 0.08497126
occ loss 0.23687083
time for this batch 0.47234296798706055
----------------------------------
train loss for this epoch:  0.264991
time for this epoch 64.894700050354
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  13
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23337115
flow loss 0.07406184
occ loss 0.15930673
time for this batch 0.39777088165283203
----------------------------------
i_batch:  64.0
the loss for this batch:  0.30133158
flow loss 0.086550094
occ loss 0.21477836
time for this batch 0.40256643295288086
----------------------------------
train loss for this epoch:  0.263387
time for this epoch 66.8178186416626
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  14
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2866698
flow loss 0.08193627
occ loss 0.20473032
time for this batch 0.39766716957092285
----------------------------------
i_batch:  64.0
the loss for this batch:  0.32276452
flow loss 0.086743414
occ loss 0.23601785
time for this batch 0.44763636589050293
----------------------------------
train loss for this epoch:  0.259706
time for this epoch 66.70117354393005
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  15
# batch:  124
i_batch:  0.0
the loss for this batch:  0.28736165
flow loss 0.07667008
occ loss 0.21068843
time for this batch 0.3534109592437744
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26993945
flow loss 0.076713435
occ loss 0.19322322
time for this batch 0.37622642517089844
----------------------------------
train loss for this epoch:  0.258619
time for this epoch 59.13344979286194
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  16
# batch:  124
i_batch:  0.0
the loss for this batch:  0.26580352
flow loss 0.074383475
occ loss 0.19141677
time for this batch 0.3821907043457031
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22406171
flow loss 0.07126738
occ loss 0.15279196
time for this batch 0.3783698081970215
----------------------------------
train loss for this epoch:  0.257553
time for this epoch 64.65511655807495
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  17
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20515211
flow loss 0.063593164
occ loss 0.14155681
time for this batch 0.3908071517944336
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22633478
flow loss 0.061244547
occ loss 0.16508782
time for this batch 0.43149352073669434
----------------------------------
train loss for this epoch:  0.254198
time for this epoch 63.884684324264526
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  18
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25871336
flow loss 0.071762316
occ loss 0.18694781
time for this batch 0.35426783561706543
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22252224
flow loss 0.06625383
occ loss 0.15626566
time for this batch 0.4129810333251953
----------------------------------
train loss for this epoch:  0.253095
time for this epoch 64.77478098869324
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  19
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20951225
flow loss 0.059287425
occ loss 0.15022269
time for this batch 0.34084296226501465
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2416055
flow loss 0.06398504
occ loss 0.17761824
time for this batch 0.37480807304382324
----------------------------------
train loss for this epoch:  0.252203
time for this epoch 66.32376456260681
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  20
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2588602
flow loss 0.06510005
occ loss 0.19375746
time for this batch 0.36916685104370117
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2182679
flow loss 0.06336883
occ loss 0.15489665
time for this batch 0.428253173828125
----------------------------------
train loss for this epoch:  0.250537
time for this epoch 64.74082684516907
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  21
# batch:  124
i_batch:  0.0
the loss for this batch:  0.16020529
flow loss 0.052411098
occ loss 0.10779229
time for this batch 0.3497593402862549
----------------------------------
i_batch:  64.0
the loss for this batch:  0.28386623
flow loss 0.078991756
occ loss 0.20487136
time for this batch 0.4232602119445801
----------------------------------
train loss for this epoch:  0.250272
time for this epoch 63.380616664886475
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  22
# batch:  124
i_batch:  0.0
the loss for this batch:  0.34749427
flow loss 0.088564806
occ loss 0.2589261
time for this batch 0.35246753692626953
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29780117
flow loss 0.08107718
occ loss 0.21672037
time for this batch 0.4185497760772705
----------------------------------
train loss for this epoch:  0.247475
time for this epoch 65.54675459861755
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  23
# batch:  124
i_batch:  0.0
the loss for this batch:  0.26502445
flow loss 0.071315244
occ loss 0.19370604
time for this batch 0.35578322410583496
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20670769
flow loss 0.059279762
occ loss 0.14742517
time for this batch 0.415050745010376
----------------------------------
train loss for this epoch:  0.249433
time for this epoch 63.95142149925232
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  24
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24722666
flow loss 0.07072515
occ loss 0.17649859
time for this batch 0.36197757720947266
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19174835
flow loss 0.057436552
occ loss 0.13430956
time for this batch 0.39067864418029785
----------------------------------
train loss for this epoch:  0.243871
time for this epoch 64.17225384712219
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  25
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23850851
flow loss 0.06426033
occ loss 0.17424552
time for this batch 0.3729565143585205
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26258808
flow loss 0.06753069
occ loss 0.19505489
time for this batch 0.3800477981567383
----------------------------------
train loss for this epoch:  0.242987
time for this epoch 64.89483594894409
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  26
# batch:  124
i_batch:  0.0
the loss for this batch:  0.35642883
flow loss 0.08662004
occ loss 0.26980442
time for this batch 0.39039015769958496
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29992503
flow loss 0.083970256
occ loss 0.21595122
time for this batch 0.41928791999816895
----------------------------------
train loss for this epoch:  0.242851
time for this epoch 64.83832621574402
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  27
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20987357
flow loss 0.0572135
occ loss 0.15265809
time for this batch 0.42040109634399414
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2714127
flow loss 0.07277812
occ loss 0.1986315
time for this batch 0.4040837287902832
----------------------------------
train loss for this epoch:  0.242959
time for this epoch 69.0268759727478
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  28
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23782761
flow loss 0.06516386
occ loss 0.1726609
time for this batch 0.3793373107910156
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24476127
flow loss 0.0650164
occ loss 0.17974208
time for this batch 0.4823179244995117
----------------------------------
train loss for this epoch:  0.24135
time for this epoch 67.80516147613525
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  29
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2292964
flow loss 0.06340987
occ loss 0.16588354
time for this batch 0.3610537052154541
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3239938
flow loss 0.08405252
occ loss 0.23993738
time for this batch 0.4557466506958008
----------------------------------
train loss for this epoch:  0.241327
time for this epoch 70.21272778511047
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  30
# batch:  124
i_batch:  0.0
the loss for this batch:  0.31835616
flow loss 0.08523356
occ loss 0.23311856
time for this batch 0.39962005615234375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27211916
flow loss 0.07518705
occ loss 0.19692853
time for this batch 0.3466031551361084
----------------------------------
train loss for this epoch:  0.241077
time for this epoch 66.04447984695435
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  31
# batch:  124
i_batch:  0.0
the loss for this batch:  0.3011439
flow loss 0.07968054
occ loss 0.22145936
time for this batch 0.3955063819885254
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2581313
flow loss 0.07037145
occ loss 0.18775672
time for this batch 0.4336435794830322
----------------------------------
train loss for this epoch:  0.239229
time for this epoch 64.47346448898315
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  32
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19770116
flow loss 0.05062636
occ loss 0.14707288
time for this batch 0.35108494758605957
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21681209
flow loss 0.060616545
occ loss 0.15619282
time for this batch 0.3938777446746826
----------------------------------
train loss for this epoch:  0.236086
time for this epoch 63.49710941314697
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  33
# batch:  124
i_batch:  0.0
the loss for this batch:  0.242918
flow loss 0.066263765
occ loss 0.1766513
time for this batch 0.3493030071258545
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21546519
flow loss 0.059429023
occ loss 0.15603371
time for this batch 0.4096810817718506
----------------------------------
train loss for this epoch:  0.235828
time for this epoch 63.68162393569946
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  34
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2596576
flow loss 0.069988005
occ loss 0.18966678
time for this batch 0.3701791763305664
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25243866
flow loss 0.068119094
occ loss 0.18431666
time for this batch 0.4254179000854492
----------------------------------
train loss for this epoch:  0.23697
time for this epoch 64.32600164413452
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  35
# batch:  124
i_batch:  0.0
the loss for this batch:  0.27059633
flow loss 0.073205486
occ loss 0.19738734
time for this batch 0.3563973903656006
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21973556
flow loss 0.0642729
occ loss 0.15545996
time for this batch 0.42598986625671387
----------------------------------
train loss for this epoch:  0.234385
time for this epoch 63.46278715133667
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  36
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23800361
flow loss 0.06321205
occ loss 0.17478888
time for this batch 0.33332347869873047
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24006633
flow loss 0.07001167
occ loss 0.17005186
time for this batch 0.3976266384124756
----------------------------------
train loss for this epoch:  0.23355
time for this epoch 64.43727254867554
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  37
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18172872
flow loss 0.052797403
occ loss 0.12892908
time for this batch 0.3683168888092041
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20891164
flow loss 0.0575725
occ loss 0.15133661
time for this batch 0.3508412837982178
----------------------------------
train loss for this epoch:  0.234249
time for this epoch 65.18748092651367
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  38
# batch:  124
i_batch:  0.0
the loss for this batch:  0.32985097
flow loss 0.08433094
occ loss 0.24551633
time for this batch 0.401993989944458
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2883162
flow loss 0.07811826
occ loss 0.21019426
time for this batch 0.42810606956481934
----------------------------------
train loss for this epoch:  0.232391
time for this epoch 65.4538459777832
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  39
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24272805
flow loss 0.06513733
occ loss 0.17758751
time for this batch 0.3680739402770996
----------------------------------
i_batch:  64.0
the loss for this batch:  0.32060355
flow loss 0.08057562
occ loss 0.24002427
time for this batch 0.4500746726989746
----------------------------------
train loss for this epoch:  0.233282
time for this epoch 63.88539910316467
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  40
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25872058
flow loss 0.07232709
occ loss 0.18639052
time for this batch 0.34786105155944824
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17480801
flow loss 0.05175481
occ loss 0.123050906
time for this batch 0.44106411933898926
----------------------------------
train loss for this epoch:  0.2327
time for this epoch 64.7789409160614
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  41
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21076715
flow loss 0.058388114
occ loss 0.15237683
time for this batch 0.341672420501709
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21167353
flow loss 0.06141482
occ loss 0.15025611
time for this batch 0.43860721588134766
----------------------------------
train loss for this epoch:  0.230426
time for this epoch 64.25742530822754
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  42
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18990739
flow loss 0.053275097
occ loss 0.13663013
time for this batch 0.41446971893310547
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26890188
flow loss 0.07273486
occ loss 0.19616394
time for this batch 0.5202672481536865
----------------------------------
train loss for this epoch:  0.23082
time for this epoch 64.41082382202148
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  43
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25010574
flow loss 0.07520906
occ loss 0.17489398
time for this batch 0.3725762367248535
----------------------------------
i_batch:  64.0
the loss for this batch:  0.32665837
flow loss 0.08441556
occ loss 0.24223883
time for this batch 0.42867231369018555
----------------------------------
train loss for this epoch:  0.231004
time for this epoch 67.20598864555359
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  44
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19635737
flow loss 0.06067921
occ loss 0.13567542
time for this batch 0.39113616943359375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25014028
flow loss 0.066013284
occ loss 0.18412378
time for this batch 0.4123568534851074
----------------------------------
train loss for this epoch:  0.230145
time for this epoch 64.10445022583008
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  45
# batch:  124
i_batch:  0.0
the loss for this batch:  0.31754395
flow loss 0.07883849
occ loss 0.2387017
time for this batch 0.34261441230773926
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18968451
flow loss 0.05161058
occ loss 0.13807185
time for this batch 0.41130948066711426
----------------------------------
train loss for this epoch:  0.229636
time for this epoch 64.36573147773743
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  46
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21825178
flow loss 0.061199468
occ loss 0.15704957
time for this batch 0.3446340560913086
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22188511
flow loss 0.060716923
occ loss 0.16116546
time for this batch 0.4124178886413574
----------------------------------
train loss for this epoch:  0.229313
time for this epoch 62.814361810684204
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  47
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19383326
flow loss 0.055853937
occ loss 0.13797687
time for this batch 0.35428786277770996
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20427732
flow loss 0.062209178
occ loss 0.14206594
time for this batch 0.43318748474121094
----------------------------------
train loss for this epoch:  0.232253
time for this epoch 64.99364304542542
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  48
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20179757
flow loss 0.057495087
occ loss 0.14430018
time for this batch 0.36868858337402344
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2616205
flow loss 0.06864457
occ loss 0.19297336
time for this batch 0.4434854984283447
----------------------------------
train loss for this epoch:  0.228834
time for this epoch 61.86514902114868
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  49
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24282022
flow loss 0.0672114
occ loss 0.17560552
time for this batch 0.33675146102905273
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24381872
flow loss 0.06844699
occ loss 0.17536825
time for this batch 0.3940401077270508
----------------------------------
train loss for this epoch:  0.227468
time for this epoch 63.64828038215637
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  50
# batch:  124
i_batch:  0.0
the loss for this batch:  0.29709285
flow loss 0.071635105
occ loss 0.22545421
time for this batch 0.3619868755340576
----------------------------------
i_batch:  64.0
the loss for this batch:  0.30997446
flow loss 0.07217752
occ loss 0.23779386
time for this batch 0.4433279037475586
----------------------------------
train loss for this epoch:  0.228276
time for this epoch 64.01695442199707
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  51
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22545177
flow loss 0.06674571
occ loss 0.15870291
time for this batch 0.359757661819458
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2810832
flow loss 0.07387729
occ loss 0.20720235
time for this batch 0.44173598289489746
----------------------------------
train loss for this epoch:  0.226622
time for this epoch 66.42050504684448
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  52
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20039037
flow loss 0.05416176
occ loss 0.14622593
time for this batch 0.3487720489501953
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14702086
flow loss 0.049383666
occ loss 0.09763548
time for this batch 0.42950868606567383
----------------------------------
train loss for this epoch:  0.226958
time for this epoch 64.27875185012817
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  53
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23997346
flow loss 0.062373467
occ loss 0.17759702
time for this batch 0.4104342460632324
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23941071
flow loss 0.063444346
occ loss 0.17596349
time for this batch 0.37542152404785156
----------------------------------
train loss for this epoch:  0.226078
time for this epoch 63.940758228302
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  54
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23415753
flow loss 0.06279874
occ loss 0.17135552
time for this batch 0.36652207374572754
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16437222
flow loss 0.051419232
occ loss 0.11295083
time for this batch 0.42168307304382324
----------------------------------
train loss for this epoch:  0.227415
time for this epoch 65.6561291217804
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  55
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23985972
flow loss 0.060905688
occ loss 0.17895134
time for this batch 0.41462278366088867
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21243352
flow loss 0.05765971
occ loss 0.15477121
time for this batch 0.4129960536956787
----------------------------------
train loss for this epoch:  0.225737
time for this epoch 66.59704542160034
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  56
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24480645
flow loss 0.062685244
occ loss 0.18211806
time for this batch 0.35903358459472656
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29190022
flow loss 0.074007355
occ loss 0.21788949
time for this batch 0.39679622650146484
----------------------------------
train loss for this epoch:  0.225858
time for this epoch 64.35539102554321
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  57
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22218587
flow loss 0.060616266
occ loss 0.1615668
time for this batch 0.3524298667907715
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17593156
flow loss 0.05190058
occ loss 0.12402888
time for this batch 0.39269089698791504
----------------------------------
train loss for this epoch:  0.225345
time for this epoch 62.3412561416626
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  58
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18208057
flow loss 0.054353576
occ loss 0.12772483
time for this batch 0.3690650463104248
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24817094
flow loss 0.06573516
occ loss 0.18243329
time for this batch 0.40694308280944824
----------------------------------
train loss for this epoch:  0.224471
time for this epoch 64.46356177330017
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  59
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1727084
flow loss 0.0522848
occ loss 0.12042116
time for this batch 0.34128499031066895
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22629435
flow loss 0.05933137
occ loss 0.16696006
time for this batch 0.41646385192871094
----------------------------------
train loss for this epoch:  0.225295
time for this epoch 64.17399549484253
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  60
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25879002
flow loss 0.06858427
occ loss 0.19020276
time for this batch 0.3576481342315674
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22250508
flow loss 0.059068415
occ loss 0.16343378
time for this batch 0.4192972183227539
----------------------------------
train loss for this epoch:  0.224667
time for this epoch 64.34464025497437
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  61
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20915931
flow loss 0.060053244
occ loss 0.14910342
time for this batch 0.3416423797607422
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16592883
flow loss 0.04606413
occ loss 0.11986268
time for this batch 0.45147109031677246
----------------------------------
train loss for this epoch:  0.226334
time for this epoch 65.75423264503479
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  62
# batch:  124
i_batch:  0.0
the loss for this batch:  0.15180781
flow loss 0.048779298
occ loss 0.10302654
time for this batch 0.3544321060180664
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25771496
flow loss 0.06799482
occ loss 0.18971731
time for this batch 0.3768117427825928
----------------------------------
train loss for this epoch:  0.223232
time for this epoch 63.73007249832153
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  63
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2380642
flow loss 0.06795489
occ loss 0.17010573
time for this batch 0.353867769241333
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20305862
flow loss 0.054985866
occ loss 0.14807028
time for this batch 0.3829801082611084
----------------------------------
train loss for this epoch:  0.223967
time for this epoch 65.26282024383545
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  64
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2234475
flow loss 0.061889693
occ loss 0.16155514
time for this batch 0.3973197937011719
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22400141
flow loss 0.057268668
occ loss 0.16673
time for this batch 0.3771805763244629
----------------------------------
train loss for this epoch:  0.224949
time for this epoch 60.672324419021606
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  65
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1987433
flow loss 0.05787281
occ loss 0.14086765
time for this batch 0.37136387825012207
----------------------------------
i_batch:  64.0
the loss for this batch:  0.090994075
flow loss 0.030529562
occ loss 0.060463462
time for this batch 0.3760867118835449
----------------------------------
train loss for this epoch:  0.222711
time for this epoch 63.15388751029968
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  66
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23881267
flow loss 0.06711008
occ loss 0.17169966
time for this batch 0.35690736770629883
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24557735
flow loss 0.06349233
occ loss 0.18208203
time for this batch 0.4288156032562256
----------------------------------
train loss for this epoch:  0.22258
time for this epoch 63.93661284446716
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  67
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2329667
flow loss 0.06546122
occ loss 0.16750237
time for this batch 0.37597060203552246
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21284042
flow loss 0.057637293
occ loss 0.15520035
time for this batch 0.4234621524810791
----------------------------------
train loss for this epoch:  0.221841
time for this epoch 63.925387382507324
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  68
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17003407
flow loss 0.051116023
occ loss 0.11891582
time for this batch 0.3694424629211426
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17708851
flow loss 0.04874417
occ loss 0.12834242
time for this batch 0.442047119140625
----------------------------------
train loss for this epoch:  0.221931
time for this epoch 65.47655534744263
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  69
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2429203
flow loss 0.063971564
occ loss 0.17894572
time for this batch 0.36678290367126465
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24554671
flow loss 0.07185016
occ loss 0.17369358
time for this batch 0.47247767448425293
----------------------------------
train loss for this epoch:  0.222488
time for this epoch 70.65302109718323
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  70
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21326977
flow loss 0.052657288
occ loss 0.16060972
time for this batch 0.3832573890686035
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1948736
flow loss 0.053702876
occ loss 0.14116804
time for this batch 0.41259169578552246
----------------------------------
train loss for this epoch:  0.221377
time for this epoch 62.92936301231384
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  71
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23262754
flow loss 0.0626507
occ loss 0.16997352
time for this batch 0.38780760765075684
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19773674
flow loss 0.055439048
occ loss 0.14229491
time for this batch 0.42720580101013184
----------------------------------
train loss for this epoch:  0.221235
time for this epoch 64.52171993255615
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  72
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25108436
flow loss 0.06422844
occ loss 0.18685251
time for this batch 0.35484886169433594
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25639877
flow loss 0.06519522
occ loss 0.19120015
time for this batch 0.451404333114624
----------------------------------
train loss for this epoch:  0.221026
time for this epoch 66.23411297798157
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  73
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18100522
flow loss 0.0488426
occ loss 0.13216057
time for this batch 0.37381505966186523
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25943074
flow loss 0.07017973
occ loss 0.1892476
time for this batch 0.4412243366241455
----------------------------------
train loss for this epoch:  0.222905
time for this epoch 67.97548031806946
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  74
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22754382
flow loss 0.06316583
occ loss 0.16437514
time for this batch 0.3749122619628906
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20631143
flow loss 0.056534827
occ loss 0.14977404
time for this batch 0.45804500579833984
----------------------------------
train loss for this epoch:  0.221646
time for this epoch 64.60028123855591
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  75
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24882598
flow loss 0.06757999
occ loss 0.18124266
time for this batch 0.40062975883483887
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23938765
flow loss 0.06436748
occ loss 0.17501722
time for this batch 0.41448283195495605
----------------------------------
train loss for this epoch:  0.221442
time for this epoch 63.79179644584656
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  76
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2532499
flow loss 0.06299437
occ loss 0.19025263
time for this batch 0.3424363136291504
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25098583
flow loss 0.065301284
occ loss 0.18568149
time for this batch 0.4272444248199463
----------------------------------
train loss for this epoch:  0.219318
time for this epoch 64.6559317111969
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  77
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2567254
flow loss 0.070549265
occ loss 0.18617266
time for this batch 0.37239742279052734
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2302971
flow loss 0.06489652
occ loss 0.16539744
time for this batch 0.41963982582092285
----------------------------------
train loss for this epoch:  0.219298
time for this epoch 64.1589424610138
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  78
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21141283
flow loss 0.05706315
occ loss 0.1543468
time for this batch 0.36356616020202637
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20881999
flow loss 0.05838967
occ loss 0.15042749
time for this batch 0.47615480422973633
----------------------------------
train loss for this epoch:  0.219796
time for this epoch 62.833232402801514
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  79
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24639542
flow loss 0.06243541
occ loss 0.18395688
time for this batch 0.42441821098327637
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2164835
flow loss 0.059195798
occ loss 0.15728515
time for this batch 0.42631101608276367
----------------------------------
train loss for this epoch:  0.220574
time for this epoch 65.04839754104614
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  80
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2239836
flow loss 0.062237117
occ loss 0.1617431
time for this batch 0.36685752868652344
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26244274
flow loss 0.07084959
occ loss 0.19159007
time for this batch 0.41624927520751953
----------------------------------
train loss for this epoch:  0.219851
time for this epoch 70.93636417388916
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  81
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18277924
flow loss 0.050908178
occ loss 0.13186878
time for this batch 0.3647747039794922
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14055707
flow loss 0.046028044
occ loss 0.09452732
time for this batch 0.3770768642425537
----------------------------------
train loss for this epoch:  0.219437
time for this epoch 90.48381471633911
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  82
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24789946
flow loss 0.06252426
occ loss 0.18537217
time for this batch 0.30913329124450684
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2112771
flow loss 0.059794158
occ loss 0.1514801
time for this batch 0.5587899684906006
----------------------------------
train loss for this epoch:  0.221147
time for this epoch 78.91724157333374
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  83
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25716075
flow loss 0.0709797
occ loss 0.18617764
time for this batch 0.41036319732666016
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2560681
flow loss 0.06510848
occ loss 0.19095698
time for this batch 0.43028807640075684
----------------------------------
train loss for this epoch:  0.218543
time for this epoch 63.35146641731262
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  84
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17471504
flow loss 0.048832744
occ loss 0.12588045
time for this batch 0.3431830406188965
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16349354
flow loss 0.04813181
occ loss 0.11535969
time for this batch 0.43465256690979004
----------------------------------
train loss for this epoch:  0.217954
time for this epoch 65.75078654289246
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  85
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20854557
flow loss 0.05698055
occ loss 0.15156257
time for this batch 0.39388084411621094
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21781945
flow loss 0.0552953
occ loss 0.16252157
time for this batch 0.4207339286804199
----------------------------------
train loss for this epoch:  0.218987
time for this epoch 65.59128332138062
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  86
# batch:  124
i_batch:  0.0
the loss for this batch:  0.26038668
flow loss 0.0699394
occ loss 0.19044374
time for this batch 0.3349316120147705
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1433255
flow loss 0.045341212
occ loss 0.097982295
time for this batch 0.40821242332458496
----------------------------------
train loss for this epoch:  0.21755
time for this epoch 64.3962414264679
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  87
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2672627
flow loss 0.0666774
occ loss 0.20058188
time for this batch 0.3589339256286621
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1777343
flow loss 0.052631833
occ loss 0.12510005
time for this batch 0.41356992721557617
----------------------------------
train loss for this epoch:  0.219121
time for this epoch 64.47300028800964
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  88
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24198037
flow loss 0.0635917
occ loss 0.17838544
time for this batch 0.3660879135131836
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27403998
flow loss 0.06440083
occ loss 0.20963584
time for this batch 0.42774105072021484
----------------------------------
train loss for this epoch:  0.217492
time for this epoch 64.44335675239563
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  89
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24994382
flow loss 0.067025095
occ loss 0.18291529
time for this batch 0.36452174186706543
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18064341
flow loss 0.05052193
occ loss 0.13011903
time for this batch 0.3694436550140381
----------------------------------
train loss for this epoch:  0.217619
time for this epoch 63.70642828941345
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  90
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23467888
flow loss 0.0649395
occ loss 0.16973618
time for this batch 0.37815165519714355
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16951062
flow loss 0.052678965
occ loss 0.1168292
time for this batch 0.4568145275115967
----------------------------------
train loss for this epoch:  0.218123
time for this epoch 63.487713098526
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  91
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24772969
flow loss 0.065608084
occ loss 0.18211852
time for this batch 0.34881162643432617
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26216236
flow loss 0.06942329
occ loss 0.19273593
time for this batch 0.4221162796020508
----------------------------------
train loss for this epoch:  0.217949
time for this epoch 64.52759408950806
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  92
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24498338
flow loss 0.06396174
occ loss 0.18101859
time for this batch 0.33783841133117676
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25558364
flow loss 0.06614224
occ loss 0.18943809
time for this batch 0.437638521194458
----------------------------------
train loss for this epoch:  0.2173
time for this epoch 63.21777033805847
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  93
# batch:  124
i_batch:  0.0
the loss for this batch:  0.14882137
flow loss 0.04374006
occ loss 0.105079494
time for this batch 0.3658754825592041
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24108885
flow loss 0.06520323
occ loss 0.17588232
time for this batch 0.4011569023132324
----------------------------------
train loss for this epoch:  0.217513
time for this epoch 64.43642544746399
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  94
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21079217
flow loss 0.057765912
occ loss 0.15302321
time for this batch 0.3817923069000244
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24285807
flow loss 0.0683254
occ loss 0.17452954
time for this batch 0.42262840270996094
----------------------------------
train loss for this epoch:  0.216174
time for this epoch 65.88479399681091
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  95
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19536641
flow loss 0.052899893
occ loss 0.14246392
time for this batch 0.4195213317871094
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25377244
flow loss 0.06786675
occ loss 0.18590218
time for this batch 0.42064857482910156
----------------------------------
train loss for this epoch:  0.218066
time for this epoch 64.65300798416138
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  96
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24350503
flow loss 0.06252372
occ loss 0.18097796
time for this batch 0.36910319328308105
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18334612
flow loss 0.0508809
occ loss 0.13246252
time for this batch 0.42620253562927246
----------------------------------
train loss for this epoch:  0.21585
time for this epoch 63.25399684906006
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  97
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2017233
flow loss 0.056182675
occ loss 0.14553767
time for this batch 0.38323211669921875
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24222258
flow loss 0.06238881
occ loss 0.1798306
time for this batch 0.4120936393737793
----------------------------------
train loss for this epoch:  0.216366
time for this epoch 62.70336294174194
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  98
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23798636
flow loss 0.060005967
occ loss 0.1779775
time for this batch 0.3928823471069336
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14164211
flow loss 0.04296834
occ loss 0.09867212
time for this batch 0.4261317253112793
----------------------------------
train loss for this epoch:  0.217242
time for this epoch 62.890560150146484
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  99
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2936978
flow loss 0.072620764
occ loss 0.2210733
time for this batch 0.3618040084838867
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17777193
flow loss 0.048562717
occ loss 0.12920672
time for this batch 0.38602137565612793
----------------------------------
train loss for this epoch:  0.21566
time for this epoch 63.830899238586426
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  100
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21381983
flow loss 0.056692373
occ loss 0.15712447
time for this batch 0.3746654987335205
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2598974
flow loss 0.064536475
occ loss 0.1953577
time for this batch 0.42615389823913574
----------------------------------
train loss for this epoch:  0.214837
time for this epoch 64.69617700576782
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  101
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21246402
flow loss 0.058914695
occ loss 0.15354629
time for this batch 0.3353245258331299
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21496193
flow loss 0.061826043
occ loss 0.15313299
time for this batch 0.39806699752807617
----------------------------------
train loss for this epoch:  0.216378
time for this epoch 63.09880566596985
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  102
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20209222
flow loss 0.05421726
occ loss 0.14787214
time for this batch 0.38831257820129395
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2663396
flow loss 0.07240526
occ loss 0.193931
time for this batch 0.3823130130767822
----------------------------------
train loss for this epoch:  0.217203
time for this epoch 64.37307834625244
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  103
# batch:  124
i_batch:  0.0
the loss for this batch:  0.16091003
flow loss 0.045628417
occ loss 0.115279645
time for this batch 0.3293178081512451
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22453776
flow loss 0.058568
occ loss 0.16596718
time for this batch 0.4544811248779297
----------------------------------
train loss for this epoch:  0.214411
time for this epoch 62.72291088104248
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  104
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20666511
flow loss 0.054446053
occ loss 0.15221624
time for this batch 0.36081933975219727
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1751381
flow loss 0.04794734
occ loss 0.12718819
time for this batch 0.39879393577575684
----------------------------------
train loss for this epoch:  0.214115
time for this epoch 62.125935077667236
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  105
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24645226
flow loss 0.06804368
occ loss 0.1784049
time for this batch 0.3735945224761963
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14986375
flow loss 0.046535708
occ loss 0.10332595
time for this batch 0.4029207229614258
----------------------------------
train loss for this epoch:  0.214418
time for this epoch 63.97379231452942
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  106
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1594873
flow loss 0.04598225
occ loss 0.11350329
time for this batch 0.3959996700286865
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21782713
flow loss 0.06273955
occ loss 0.15508448
time for this batch 0.33363866806030273
----------------------------------
train loss for this epoch:  0.214009
time for this epoch 64.23186135292053
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  107
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20062116
flow loss 0.055711035
occ loss 0.14490771
time for this batch 0.37154555320739746
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1914297
flow loss 0.053312827
occ loss 0.13811436
time for this batch 0.36293649673461914
----------------------------------
train loss for this epoch:  0.213389
time for this epoch 64.60707974433899
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  108
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24204414
flow loss 0.06600581
occ loss 0.17603518
time for this batch 0.3536372184753418
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17829725
flow loss 0.053533547
occ loss 0.1247614
time for this batch 0.42937541007995605
----------------------------------
train loss for this epoch:  0.216524
time for this epoch 64.43332242965698
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  109
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20673995
flow loss 0.05615323
occ loss 0.15058367
time for this batch 0.3792073726654053
----------------------------------
i_batch:  64.0
the loss for this batch:  0.28941056
flow loss 0.07314937
occ loss 0.21625741
time for this batch 0.45015978813171387
----------------------------------
train loss for this epoch:  0.21396
time for this epoch 66.00393056869507
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  110
# batch:  124
i_batch:  0.0
the loss for this batch:  0.27440494
flow loss 0.06811375
occ loss 0.20628758
time for this batch 0.3572421073913574
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22531033
flow loss 0.060392033
occ loss 0.16491535
time for this batch 0.4055202007293701
----------------------------------
train loss for this epoch:  0.212603
time for this epoch 64.43120646476746
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  111
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18521972
flow loss 0.0506422
occ loss 0.13457529
time for this batch 0.37344813346862793
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20482026
flow loss 0.058616206
occ loss 0.14620143
time for this batch 0.37374091148376465
----------------------------------
train loss for this epoch:  0.215001
time for this epoch 64.05410432815552
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  112
# batch:  124
i_batch:  0.0
the loss for this batch:  0.14251673
flow loss 0.038523242
occ loss 0.10399166
time for this batch 0.36662936210632324
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23092045
flow loss 0.059905328
occ loss 0.17101222
time for this batch 0.4206993579864502
----------------------------------
train loss for this epoch:  0.213278
time for this epoch 63.666606426239014
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  113
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24783549
flow loss 0.06307926
occ loss 0.18475333
time for this batch 0.3476274013519287
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15631752
flow loss 0.046571746
occ loss 0.10974369
time for this batch 0.43724775314331055
----------------------------------
train loss for this epoch:  0.213543
time for this epoch 63.931418657302856
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  114
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24583666
flow loss 0.06464058
occ loss 0.18119268
time for this batch 0.38300490379333496
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19352452
flow loss 0.054992218
occ loss 0.1385297
time for this batch 0.4163682460784912
----------------------------------
train loss for this epoch:  0.21362
time for this epoch 63.80172514915466
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  115
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19152059
flow loss 0.057643306
occ loss 0.13387449
time for this batch 0.3579742908477783
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18646188
flow loss 0.05480038
occ loss 0.13165891
time for this batch 0.3592679500579834
----------------------------------
train loss for this epoch:  0.212537
time for this epoch 63.2809419631958
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  116
# batch:  124
i_batch:  0.0
the loss for this batch:  0.26449206
flow loss 0.070893444
occ loss 0.19359502
time for this batch 0.3414909839630127
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16508968
flow loss 0.050160483
occ loss 0.11492708
time for this batch 0.4393153190612793
----------------------------------
train loss for this epoch:  0.21285
time for this epoch 63.84475040435791
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  117
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23729403
flow loss 0.06634476
occ loss 0.1709463
time for this batch 0.36542320251464844
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1938456
flow loss 0.052014608
occ loss 0.14182839
time for this batch 0.39249563217163086
----------------------------------
train loss for this epoch:  0.215585
time for this epoch 64.81944870948792
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  118
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21726498
flow loss 0.055753484
occ loss 0.16150856
time for this batch 0.3986480236053467
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16999643
flow loss 0.049389582
occ loss 0.1206046
time for this batch 0.3654143810272217
----------------------------------
train loss for this epoch:  0.212277
time for this epoch 58.830058336257935
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  119
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17673735
flow loss 0.05495062
occ loss 0.121784426
time for this batch 0.37479734420776367
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19653517
flow loss 0.051798802
occ loss 0.14473343
time for this batch 0.3872499465942383
----------------------------------
train loss for this epoch:  0.211559
time for this epoch 55.46442008018494
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  120
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2563559
flow loss 0.063864015
occ loss 0.19248885
time for this batch 0.3847525119781494
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19378258
flow loss 0.050619133
occ loss 0.14316079
time for this batch 0.3590986728668213
----------------------------------
train loss for this epoch:  0.21169
time for this epoch 56.64624547958374
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  121
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23291159
flow loss 0.063200474
occ loss 0.16970733
time for this batch 0.3690633773803711
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26988524
flow loss 0.07041722
occ loss 0.19946429
time for this batch 0.4008328914642334
----------------------------------
train loss for this epoch:  0.21235
time for this epoch 55.56580972671509
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  122
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1601842
flow loss 0.046895944
occ loss 0.113286324
time for this batch 0.3821992874145508
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22062787
flow loss 0.05868803
occ loss 0.16193676
time for this batch 0.4092991352081299
----------------------------------
train loss for this epoch:  0.211471
time for this epoch 61.49746370315552
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  123
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20425737
flow loss 0.05698468
occ loss 0.14726973
time for this batch 0.37255334854125977
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23772638
flow loss 0.06482458
occ loss 0.17289852
time for this batch 0.41162109375
----------------------------------
train loss for this epoch:  0.21167
time for this epoch 61.30021691322327
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  124
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23141266
flow loss 0.06302873
occ loss 0.16838086
time for this batch 0.3486642837524414
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19742598
flow loss 0.05621269
occ loss 0.14121024
time for this batch 0.30992841720581055
----------------------------------
train loss for this epoch:  0.212272
time for this epoch 58.568278074264526
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  125
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22574507
flow loss 0.05803236
occ loss 0.16770962
time for this batch 0.36562609672546387
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2433154
flow loss 0.061901964
occ loss 0.18141054
time for this batch 0.3951303958892822
----------------------------------
train loss for this epoch:  0.211744
time for this epoch 58.874972343444824
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  126
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2087513
flow loss 0.05419426
occ loss 0.15455434
time for this batch 0.35872602462768555
----------------------------------
i_batch:  64.0
the loss for this batch:  0.178696
flow loss 0.051883984
occ loss 0.12680934
time for this batch 0.42586827278137207
----------------------------------
train loss for this epoch:  0.209993
time for this epoch 58.25347280502319
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  127
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21742555
flow loss 0.056826066
occ loss 0.16059646
time for this batch 0.32117795944213867
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27724773
flow loss 0.07294938
occ loss 0.20429467
time for this batch 0.5063233375549316
----------------------------------
train loss for this epoch:  0.210375
time for this epoch 56.34231472015381
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  128
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2487323
flow loss 0.065648034
occ loss 0.18308082
time for this batch 0.3290388584136963
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24110787
flow loss 0.064633034
occ loss 0.17647156
time for this batch 0.38592529296875
----------------------------------
train loss for this epoch:  0.211516
time for this epoch 56.57375717163086
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  129
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20662975
flow loss 0.0546011
occ loss 0.152026
time for this batch 0.3309895992279053
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21364722
flow loss 0.05786028
occ loss 0.15578432
time for this batch 0.3214282989501953
----------------------------------
train loss for this epoch:  0.211107
time for this epoch 56.306612968444824
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  130
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20145021
flow loss 0.055447947
occ loss 0.14599955
time for this batch 0.3361365795135498
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22615156
flow loss 0.06159767
occ loss 0.16455059
time for this batch 0.3213961124420166
----------------------------------
train loss for this epoch:  0.209915
time for this epoch 56.32469820976257
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  131
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23562488
flow loss 0.06327883
occ loss 0.17234272
time for this batch 0.32899975776672363
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21874739
flow loss 0.05724965
occ loss 0.16149502
time for this batch 0.3407406806945801
----------------------------------
train loss for this epoch:  0.210434
time for this epoch 59.72876191139221
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  132
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18971676
flow loss 0.04979682
occ loss 0.13991717
time for this batch 0.322329044342041
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20975265
flow loss 0.05819595
occ loss 0.15155402
time for this batch 0.3857383728027344
----------------------------------
train loss for this epoch:  0.211481
time for this epoch 55.785950899124146
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  133
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23508605
flow loss 0.06311491
occ loss 0.17196804
time for this batch 0.3412330150604248
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18694127
flow loss 0.055814847
occ loss 0.13112387
time for this batch 0.4170713424682617
----------------------------------
train loss for this epoch:  0.210119
time for this epoch 61.182568311691284
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  134
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25390643
flow loss 0.064196825
occ loss 0.18970604
time for this batch 0.3599534034729004
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21167971
flow loss 0.05742862
occ loss 0.15424807
time for this batch 0.37728333473205566
----------------------------------
train loss for this epoch:  0.209398
time for this epoch 59.87407159805298
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  135
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1283913
flow loss 0.03851745
occ loss 0.089871965
time for this batch 0.3229830265045166
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2059222
flow loss 0.054036215
occ loss 0.15188345
time for this batch 0.39731502532958984
----------------------------------
train loss for this epoch:  0.213667
time for this epoch 58.16420269012451
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  136
# batch:  124
i_batch:  0.0
the loss for this batch:  0.26863408
flow loss 0.074399404
occ loss 0.19423105
time for this batch 0.3361687660217285
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20852926
flow loss 0.05475616
occ loss 0.1537705
time for this batch 0.4274575710296631
----------------------------------
train loss for this epoch:  0.21022
time for this epoch 57.904603242874146
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  137
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24111238
flow loss 0.0673532
occ loss 0.17375565
time for this batch 0.3148152828216553
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2309718
flow loss 0.06056826
occ loss 0.17040049
time for this batch 0.40639472007751465
----------------------------------
train loss for this epoch:  0.208927
time for this epoch 57.74969267845154
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  138
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18185465
flow loss 0.049618743
occ loss 0.1322336
time for this batch 0.3713679313659668
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17702945
flow loss 0.050120205
occ loss 0.12690732
time for this batch 0.41307568550109863
----------------------------------
train loss for this epoch:  0.209942
time for this epoch 55.91401815414429
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  139
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2628631
flow loss 0.0671072
occ loss 0.19575223
time for this batch 0.32476210594177246
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16200754
flow loss 0.05114904
occ loss 0.11085657
time for this batch 0.30687999725341797
----------------------------------
train loss for this epoch:  0.210415
time for this epoch 53.5046808719635
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  140
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20282146
flow loss 0.055188127
occ loss 0.14763069
time for this batch 0.30498790740966797
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22499761
flow loss 0.056701876
occ loss 0.16829264
time for this batch 0.3870532512664795
----------------------------------
train loss for this epoch:  0.208599
time for this epoch 54.768006563186646
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  141
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19892313
flow loss 0.054397345
occ loss 0.144523
time for this batch 0.3192136287689209
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20922564
flow loss 0.05571004
occ loss 0.15351295
time for this batch 0.38930654525756836
----------------------------------
train loss for this epoch:  0.208711
time for this epoch 56.63638877868652
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  142
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23038302
flow loss 0.06111124
occ loss 0.16926871
time for this batch 0.33443760871887207
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2550668
flow loss 0.062696934
occ loss 0.19236614
time for this batch 0.38790225982666016
----------------------------------
train loss for this epoch:  0.209323
time for this epoch 55.44406247138977
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  143
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19009264
flow loss 0.055263642
occ loss 0.13482639
time for this batch 0.3257451057434082
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23303059
flow loss 0.060912035
occ loss 0.17211527
time for this batch 0.3746650218963623
----------------------------------
train loss for this epoch:  0.209
time for this epoch 55.78240942955017
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  144
# batch:  124
i_batch:  0.0
the loss for this batch:  0.11835137
flow loss 0.0400748
occ loss 0.07827516
time for this batch 0.3313758373260498
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18327872
flow loss 0.047852326
occ loss 0.13542417
time for this batch 0.3848888874053955
----------------------------------
train loss for this epoch:  0.208211
time for this epoch 56.64357781410217
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  145
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2488807
flow loss 0.062212776
occ loss 0.1866642
time for this batch 0.38059353828430176
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24782771
flow loss 0.062110957
occ loss 0.18571348
time for this batch 0.38643336296081543
----------------------------------
train loss for this epoch:  0.208176
time for this epoch 56.599366664886475
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  146
# batch:  124
i_batch:  0.0
the loss for this batch:  0.29785857
flow loss 0.068120055
occ loss 0.22973475
time for this batch 0.34505224227905273
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22203387
flow loss 0.056877524
occ loss 0.16515368
time for this batch 0.3803422451019287
----------------------------------
train loss for this epoch:  0.209223
time for this epoch 55.51722049713135
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  147
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25428492
flow loss 0.063405894
occ loss 0.19087556
time for this batch 0.3295407295227051
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2345484
flow loss 0.065785564
occ loss 0.1687591
time for this batch 0.35782599449157715
----------------------------------
train loss for this epoch:  0.208218
time for this epoch 54.59795594215393
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  148
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17846173
flow loss 0.047206663
occ loss 0.13125277
time for this batch 0.315962553024292
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24633193
flow loss 0.06216304
occ loss 0.18416575
time for this batch 0.3864712715148926
----------------------------------
train loss for this epoch:  0.208218
time for this epoch 56.1541268825531
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  149
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24239168
flow loss 0.06047398
occ loss 0.18191461
time for this batch 0.321929931640625
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17228535
flow loss 0.05033914
occ loss 0.121943906
time for this batch 0.32518887519836426
----------------------------------
train loss for this epoch:  0.207781
time for this epoch 57.129225730895996
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  150
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2244454
flow loss 0.056937907
occ loss 0.16750416
time for this batch 0.31630468368530273
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18729609
flow loss 0.049582582
occ loss 0.13771072
time for this batch 0.35686755180358887
----------------------------------
train loss for this epoch:  0.201661
time for this epoch 55.684991121292114
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  151
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1753339
flow loss 0.047320258
occ loss 0.12801105
time for this batch 0.32454872131347656
----------------------------------
i_batch:  64.0
the loss for this batch:  0.192692
flow loss 0.05265817
occ loss 0.14003113
time for this batch 0.2978541851043701
----------------------------------
train loss for this epoch:  0.200903
time for this epoch 53.048187494277954
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  152
# batch:  124
i_batch:  0.0
the loss for this batch:  0.26624894
flow loss 0.065849
occ loss 0.20039609
time for this batch 0.2990133762359619
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23105258
flow loss 0.060309287
occ loss 0.17074025
time for this batch 0.39438629150390625
----------------------------------
train loss for this epoch:  0.200432
time for this epoch 53.53631830215454
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  153
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20125723
flow loss 0.05198226
occ loss 0.14927202
time for this batch 0.323575496673584
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24994451
flow loss 0.06597289
occ loss 0.18396851
time for this batch 0.510509729385376
----------------------------------
train loss for this epoch:  0.200334
time for this epoch 56.9165403842926
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  154
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20785716
flow loss 0.053217836
occ loss 0.15463658
time for this batch 0.32592201232910156
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2232609
flow loss 0.06094235
occ loss 0.1623153
time for this batch 0.39625978469848633
----------------------------------
train loss for this epoch:  0.200436
time for this epoch 56.47639560699463
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  155
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19218272
flow loss 0.048993252
occ loss 0.14318706
time for this batch 0.32420778274536133
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23999485
flow loss 0.061932057
occ loss 0.17805924
time for this batch 0.39332151412963867
----------------------------------
train loss for this epoch:  0.200201
time for this epoch 54.155601978302
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  156
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2069004
flow loss 0.05332841
occ loss 0.15356866
time for this batch 0.3404395580291748
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26193628
flow loss 0.062174514
occ loss 0.19975832
time for this batch 0.3901705741882324
----------------------------------
train loss for this epoch:  0.199967
time for this epoch 55.23315477371216
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  157
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20144634
flow loss 0.05279195
occ loss 0.14865184
time for this batch 0.30121493339538574
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19671245
flow loss 0.05231777
occ loss 0.14439194
time for this batch 0.381575345993042
----------------------------------
train loss for this epoch:  0.200062
time for this epoch 54.60373258590698
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  158
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17137113
flow loss 0.04477831
occ loss 0.12659039
time for this batch 0.3173866271972656
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21988295
flow loss 0.058177125
occ loss 0.16170266
time for this batch 0.3918344974517822
----------------------------------
train loss for this epoch:  0.199849
time for this epoch 57.546244859695435
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  159
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19215323
flow loss 0.053069033
occ loss 0.13908127
time for this batch 0.31949782371520996
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1993109
flow loss 0.05238954
occ loss 0.14691879
time for this batch 0.3956947326660156
----------------------------------
train loss for this epoch:  0.200333
time for this epoch 56.44371843338013
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  160
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20478562
flow loss 0.053388152
occ loss 0.15139428
time for this batch 0.3165135383605957
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17709242
flow loss 0.047770016
occ loss 0.12932031
time for this batch 0.3697688579559326
----------------------------------
train loss for this epoch:  0.200065
time for this epoch 55.90036463737488
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  161
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20139644
flow loss 0.056214385
occ loss 0.1451792
time for this batch 0.323899507522583
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1383798
flow loss 0.037277464
occ loss 0.1011002
time for this batch 0.38036179542541504
----------------------------------
train loss for this epoch:  0.200046
time for this epoch 57.73889207839966
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  162
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19735937
flow loss 0.05547396
occ loss 0.14188233
time for this batch 0.3212001323699951
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17293061
flow loss 0.04994178
occ loss 0.12298653
time for this batch 0.39752650260925293
----------------------------------
train loss for this epoch:  0.199723
time for this epoch 57.77620816230774
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  163
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2317263
flow loss 0.06256308
occ loss 0.16916
time for this batch 0.3033106327056885
----------------------------------
i_batch:  64.0
the loss for this batch:  0.28112763
flow loss 0.069777265
occ loss 0.21134649
time for this batch 0.3989837169647217
----------------------------------
train loss for this epoch:  0.199509
time for this epoch 57.44611048698425
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  164
# batch:  124
i_batch:  0.0
the loss for this batch:  0.28318048
flow loss 0.06697693
occ loss 0.21620008
time for this batch 0.3170745372772217
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2456922
flow loss 0.06381675
occ loss 0.1818718
time for this batch 0.3914933204650879
----------------------------------
train loss for this epoch:  0.199636
time for this epoch 57.572893142700195
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  165
# batch:  124
i_batch:  0.0
the loss for this batch:  0.16972029
flow loss 0.048434917
occ loss 0.12128296
time for this batch 0.3256263732910156
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21637535
flow loss 0.054467782
occ loss 0.16190444
time for this batch 0.3866543769836426
----------------------------------
train loss for this epoch:  0.199571
time for this epoch 56.720921993255615
No_decrease:  12
----------------an epoch starts-------------------
i_epoch:  166
# batch:  124
i_batch:  0.0
the loss for this batch:  0.108016655
flow loss 0.033408284
occ loss 0.074606895
time for this batch 0.32808971405029297
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1947549
flow loss 0.053989824
occ loss 0.14076224
time for this batch 0.3755362033843994
----------------------------------
train loss for this epoch:  0.199238
time for this epoch 55.81381440162659
No_decrease:  13
----------------an epoch starts-------------------
i_epoch:  167
# batch:  124
i_batch:  0.0
the loss for this batch:  0.15822677
flow loss 0.044752914
occ loss 0.11347144
time for this batch 0.3113250732421875
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1956548
flow loss 0.05289922
occ loss 0.14275245
time for this batch 0.38080644607543945
----------------------------------
train loss for this epoch:  0.199772
time for this epoch 56.0125253200531
No_decrease:  14
----------------an epoch starts-------------------
i_epoch:  168
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2194621
flow loss 0.058450915
occ loss 0.1610081
time for this batch 0.31364011764526367
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21686721
flow loss 0.053421393
occ loss 0.16344327
time for this batch 0.3056638240814209
----------------------------------
train loss for this epoch:  0.199704
time for this epoch 53.48667287826538
No_decrease:  15
----------------an epoch starts-------------------
i_epoch:  169
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18125194
flow loss 0.048919376
occ loss 0.13232994
time for this batch 0.3183562755584717
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2178165
flow loss 0.05815923
occ loss 0.1596545
time for this batch 0.3759727478027344
----------------------------------
train loss for this epoch:  0.199398
time for this epoch 55.6176118850708
No_decrease:  16
----------------an epoch starts-------------------
i_epoch:  170
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21079533
flow loss 0.054329745
occ loss 0.15646267
time for this batch 0.32271265983581543
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1437951
flow loss 0.03959429
occ loss 0.10419866
time for this batch 0.3952035903930664
----------------------------------
train loss for this epoch:  0.199356
time for this epoch 55.795676946640015
No_decrease:  17
----------------an epoch starts-------------------
i_epoch:  171
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19128905
flow loss 0.055332437
occ loss 0.13595349
time for this batch 0.3120403289794922
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19430304
flow loss 0.053919017
occ loss 0.14038134
time for this batch 0.37479496002197266
----------------------------------
train loss for this epoch:  0.19926
time for this epoch 58.07227826118469
No_decrease:  18
----------------an epoch starts-------------------
i_epoch:  172
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19409262
flow loss 0.051722232
occ loss 0.14236754
time for this batch 0.3156604766845703
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19595066
flow loss 0.04708612
occ loss 0.14886169
time for this batch 0.3868851661682129
----------------------------------
train loss for this epoch:  0.199789
time for this epoch 57.325305700302124
No_decrease:  19
----------------an epoch starts-------------------
i_epoch:  173
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1838944
flow loss 0.048196007
occ loss 0.13569577
time for this batch 0.3226451873779297
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17407499
flow loss 0.047452502
occ loss 0.12662022
time for this batch 0.39611387252807617
----------------------------------
train loss for this epoch:  0.199076
time for this epoch 57.09229564666748
No_decrease:  20
----------------an epoch starts-------------------
i_epoch:  174
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22511825
flow loss 0.058299985
occ loss 0.16681513
time for this batch 0.328416109085083
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17836024
flow loss 0.045010284
occ loss 0.13334721
time for this batch 0.29062557220458984
----------------------------------
train loss for this epoch:  0.198957
time for this epoch 56.195101499557495
No_decrease:  21
----------------an epoch starts-------------------
i_epoch:  175
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17183615
flow loss 0.046829987
occ loss 0.12500353
time for this batch 0.3171260356903076
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22176304
flow loss 0.060318828
occ loss 0.16144116
time for this batch 0.3800315856933594
----------------------------------
train loss for this epoch:  0.198952
time for this epoch 55.91214919090271
No_decrease:  22
----------------an epoch starts-------------------
i_epoch:  176
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18915385
flow loss 0.051168844
occ loss 0.13798212
time for this batch 0.32608938217163086
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1437366
flow loss 0.04008964
occ loss 0.10364496
time for this batch 0.33907055854797363
----------------------------------
train loss for this epoch:  0.199191
time for this epoch 55.730061769485474
No_decrease:  23
----------------an epoch starts-------------------
i_epoch:  177
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17639445
flow loss 0.049493212
occ loss 0.1268987
time for this batch 0.3156003952026367
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27735314
flow loss 0.06758655
occ loss 0.20976263
time for this batch 0.3895556926727295
----------------------------------
train loss for this epoch:  0.199935
time for this epoch 55.65227246284485
No_decrease:  24
----------------an epoch starts-------------------
i_epoch:  178
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1476523
flow loss 0.04087297
occ loss 0.10677728
time for this batch 0.3224673271179199
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25809944
flow loss 0.06386791
occ loss 0.19422823
time for this batch 0.3849151134490967
----------------------------------
train loss for this epoch:  0.199131
time for this epoch 54.78545117378235
No_decrease:  25
----------------an epoch starts-------------------
i_epoch:  179
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21944323
flow loss 0.05887279
occ loss 0.16056746
time for this batch 0.31783342361450195
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20652843
flow loss 0.054528486
occ loss 0.15199685
time for this batch 0.382770299911499
----------------------------------
train loss for this epoch:  0.198878
time for this epoch 57.09069561958313
No_decrease:  26
----------------an epoch starts-------------------
i_epoch:  180
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19052848
flow loss 0.05019858
occ loss 0.14032696
time for this batch 0.31153154373168945
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20551127
flow loss 0.05635726
occ loss 0.14915074
time for this batch 0.385603666305542
----------------------------------
train loss for this epoch:  0.198854
time for this epoch 57.08790469169617
No_decrease:  27
----------------an epoch starts-------------------
i_epoch:  181
# batch:  124
i_batch:  0.0
the loss for this batch:  0.16077527
flow loss 0.04334706
occ loss 0.1174257
time for this batch 0.3204672336578369
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23912345
flow loss 0.060803324
occ loss 0.1783168
time for this batch 0.385648250579834
----------------------------------
train loss for this epoch:  0.198855
time for this epoch 56.17385387420654
No_decrease:  28
----------------an epoch starts-------------------
i_epoch:  182
# batch:  124
i_batch:  0.0
the loss for this batch:  0.15562364
flow loss 0.040993232
occ loss 0.11462846
time for this batch 0.32282233238220215
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15093958
flow loss 0.043097693
occ loss 0.10783965
time for this batch 0.3839266300201416
----------------------------------
train loss for this epoch:  0.198908
time for this epoch 55.856362104415894
No_decrease:  29
----------------an epoch starts-------------------
i_epoch:  183
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23152345
flow loss 0.0559819
occ loss 0.17553829
time for this batch 0.3250880241394043
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1986061
flow loss 0.053061433
occ loss 0.14554173
time for this batch 0.389678955078125
----------------------------------
train loss for this epoch:  0.198721
time for this epoch 54.26301646232605
Early stop at the 184-th epoch

7: apply the model to vali and test¶

In [14]:
def apply_to_vali_test(model, vt, f_o_mean_std):
    """Evaluate a trained model on a validation or test split.

    Parameters
    ----------
    model : trained NMFD_GNN model to evaluate.
    vt : dict with keys "flow", "flow_mask", "occupancy", "occupancy_mask".
        Masks are moved to the GPU here; the flow/occupancy tensors are
        passed to ``vali_test`` unchanged.
    f_o_mean_std : normalization statistics forwarded to ``vali_test``
        (presumably used to de-normalize predictions — confirm there).

    Returns
    -------
    tuple
        (flow MAE, flow RMSE, occupancy MAE, occupancy RMSE); each value
        is also printed for the notebook record.
    """
    flow = vt["flow"]
    flow_mask = vt["flow_mask"].to(device)
    occ = vt["occupancy"]
    occ_mask = vt["occupancy_mask"].to(device)
    # Batch size for evaluation comes from the shared hyperparameter dict.
    metrics = vali_test(model, flow, flow_mask, occ, occ_mask,
                        f_o_mean_std, hyper["b_s_vt"])
    f_mae, f_rmse, o_mae, o_rmse = metrics
    for label, value in zip(("flow_mae", "flow_rmse", "occ_mae", "occ_rmse"),
                            metrics):
        print(label, value)
    return f_mae, f_rmse, o_mae, o_rmse

Validate¶

In [15]:
# Evaluate the trained model on the validation split; metrics are printed
# inside apply_to_vali_test and captured here for later reference.
(vali_f_mae, vali_f_rmse,
 vali_o_mae, vali_o_rmse) = apply_to_vali_test(trained_model, vali, f_o_mean_std)
flow_mae 35.053272603131084
flow_rmse 54.09957841016035
occ_mae 0.03839110656226632
occ_rmse 0.07866730632577427

Test¶

In [16]:
# Evaluate the trained model on the held-out test split; metrics are printed
# inside apply_to_vali_test and captured here for later reference.
(test_f_mae, test_f_rmse,
 test_o_mae, test_o_rmse) = apply_to_vali_test(trained_model, test, f_o_mean_std)
flow_mae 33.34706943010906
flow_rmse 51.37163666432371
occ_mae 0.03153476901395547
occ_rmse 0.06751021108147849
In [ ]:
 
In [ ]:
 
In [ ]: